use core::{
    ffi::c_void,
    marker::PhantomData,
    ops::{Index, IndexMut, Range, RangeBounds},
    ptr::{self, NonNull},
    slice,
};

use crate::re::BSTArray::allocator::Allocator;

use super::allocator::BSScrapArrayAllocator;

#[repr(C)]
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BSTArrayBase {
    pub size: u32,
}
const_assert_eq!(core::mem::size_of::<BSTArrayBase>(), 0x4);

impl BSTArrayBase {
    #[inline]
    pub const fn new() -> Self {
        Self { size: 0 }
    }
}

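/// Scrap-heap backed dynamic array mirroring the engine's `BSScrapArray`
/// layout: an allocator base followed by a `u32` element count.
///
/// Usage sketch (illustrative; assumes the default scrap allocator can
/// service allocations in the current context):
///
/// ```ignore
/// let mut arr: BSScrapArray<u32> = BSScrapArray::new();
/// arr.push(1);
/// arr.push(2);
/// assert_eq!(arr.len(), 2);
/// assert_eq!(arr.pop(), Some(2));
/// ```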
#[repr(C)]
pub struct BSScrapArray<T, A = BSScrapArrayAllocator>
where
    A: Allocator,
{
    __base: A,
    __base1: BSTArrayBase,
    _marker: PhantomData<T>,
}
const _: () = assert!(core::mem::size_of::<BSScrapArray<u8>>() == 0x20);

impl<T, A> BSScrapArray<T, A>
where
    A: Allocator,
{
    pub fn new() -> Self {
        Self { __base: A::new(), __base1: BSTArrayBase::new(), _marker: PhantomData }
    }

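    /// Creates an array whose buffer is pre-allocated for `capacity`
    /// elements. The capacity is stored as a `u32`, matching the engine
    /// layout.
    ///
    /// Illustrative call (hypothetical size; assumes the allocator can
    /// satisfy the request):
    ///
    /// ```ignore
    /// let arr: BSScrapArray<u64> = BSScrapArray::with_capacity(16);
    /// assert_eq!(arr.capacity(), 16);
    /// assert!(arr.is_empty());
    /// ```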
    pub fn with_capacity(capacity: usize) -> Self {
        let mut allocator = A::new();

        let new_data = unsafe { allocator.allocate(A::ptr_layout(capacity)) };
        let capacity = capacity as u32;
        Self::set_allocator_traits(&mut allocator, new_data, capacity);

        Self { __base: allocator, __base1: BSTArrayBase::new(), _marker: PhantomData }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.__base1.size as usize
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        self.__base.capacity() as usize
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        let len = self.len();
        self.change_capacity(len);
    }

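    /// Appends `value` to the back, growing the buffer when it is full.
    ///
    /// Growth sketch (doubling, per `grow` below):
    ///
    /// ```ignore
    /// let mut arr: BSScrapArray<i32> = BSScrapArray::new();
    /// for i in 0..5 {
    ///     arr.push(i); // capacity steps 0 -> 4 -> 8
    /// }
    /// assert!(arr.capacity() >= arr.len());
    /// ```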
    #[inline]
    pub fn push(&mut self, value: T) {
        let size = self.__base1.size;
        if size == self.__base.capacity() {
            self.grow();
        }
        unsafe {
            self.as_mut_ptr().add(size as usize).write(value);
        }

        self.__base1.size += 1;
    }

    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        let len = self.len();
        if len == 0 {
            None
        } else {
            self.__base1.size -= 1;
            unsafe { Some(ptr::read(self.as_ptr().add(len - 1))) }
        }
    }

    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len() {
            return unsafe { self.as_ptr().add(index).as_ref() };
        }
        None
    }

    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len() {
            return unsafe { self.as_mut_ptr().add(index).as_mut() };
        }
        None
    }

    #[inline]
    pub fn clear(&mut self) {
        for i in 0..self.len() {
            unsafe {
                ptr::drop_in_place(self.as_mut_ptr().add(i));
            }
        }

        self.__base1.size = 0;
    }

    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self.__base.as_ptr().cast()
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.__base.as_mut_ptr().cast()
    }

    #[inline]
    pub fn contains(&self, value: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(value)
    }

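    /// Keeps only the elements for which `f` returns `true`, dropping the
    /// rest in place and compacting the survivors to the front.
    ///
    /// ```ignore
    /// let mut arr: BSScrapArray<i32> = BSScrapArray::new();
    /// arr.extend([1, 2, 3, 4]);
    /// arr.retain(|x| x % 2 == 0);
    /// assert_eq!(arr.as_slice(), &[2, 4]);
    /// ```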
    #[inline]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut retained = 0;

        for i in 0..self.len() {
            let elem = match unsafe { self.as_ptr().add(i).as_ref() } {
                Some(elem) => elem,
                None => continue,
            };

            if f(elem) {
                if retained != i {
                    unsafe {
                        let src = self.as_ptr().add(i);
                        let dst = self.as_mut_ptr().add(retained);
                        ptr::copy_nonoverlapping(src, dst, 1);
                    }
                }
                retained += 1;
            } else {
                unsafe { ptr::drop_in_place(self.as_mut_ptr().add(i)) };
            }
        }

        self.__base1.size = retained as u32;
    }

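    /// Grows the array with clones of `value`, or shrinks it by dropping
    /// the tail elements.
    ///
    /// ```ignore
    /// let mut arr: BSScrapArray<u8> = BSScrapArray::new();
    /// arr.resize(3, 7); // [7, 7, 7]
    /// arr.resize(1, 0); // [7]
    /// assert_eq!(arr.len(), 1);
    /// ```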
    #[inline]
    pub fn resize(&mut self, new_size: usize, value: T)
    where
        T: Clone,
    {
        let prev_size = self.len();
        if new_size > prev_size {
            for _ in prev_size..new_size {
                self.push(value.clone());
            }
        } else {
            for i in new_size..prev_size {
                unsafe { ptr::drop_in_place(self.as_mut_ptr().add(i)) };
            }
        }
        self.__base1.size = new_size as u32;
    }

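    /// Removes the given range and returns an iterator over the removed
    /// elements; when the drain is dropped, the tail is shifted back down
    /// and the length restored.
    ///
    /// ```ignore
    /// let mut arr: BSScrapArray<i32> = BSScrapArray::new();
    /// arr.extend([1, 2, 3, 4, 5]);
    /// let drained: Vec<i32> = arr.drain(1..3).collect();
    /// assert_eq!(drained, vec![2, 3]);
    /// assert_eq!(arr.as_slice(), &[1, 4, 5]);
    /// ```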
    #[inline]
    pub fn drain<R>(&mut self, range: R) -> BSTDrain<'_, T, A>
    where
        R: RangeBounds<usize>,
    {
        let len = self.len();
        let Range { start, end } = stdx::slice::range(range, ..len);
        debug_assert!(start <= end);
        debug_assert!(end <= len);

        self.__base1.size = start as u32;

        BSTDrain {
            iter: unsafe { core::slice::from_raw_parts(self.as_ptr().add(start), end - start) }
                .iter(),
            tail_start: end,
            tail_len: len - end,
            array: unsafe { NonNull::new_unchecked(self as *mut Self) },
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        let ptr = self.as_ptr();
        let len = self.len();

        if ptr.is_null() || (len == 0) {
            return &[];
        }
        unsafe { slice::from_raw_parts(ptr, len) }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let ptr = self.as_mut_ptr();
        let len = self.len();

        if ptr.is_null() || (len == 0) {
            return &mut [];
        }
        unsafe { slice::from_raw_parts_mut(ptr, len) }
    }

    #[inline]
    pub const fn iter(&self) -> BSTArrayIterator<'_, T, A> {
        BSTArrayIterator { array: self, index: 0 }
    }

    fn grow(&mut self) {
        const MIN_CAPACITY: usize = 4;
        const GROWTH_FACTOR: usize = 2;

        let old_capacity = self.capacity();
        let new_capacity =
            if old_capacity == 0 { MIN_CAPACITY } else { old_capacity * GROWTH_FACTOR };
        self.change_capacity(new_capacity);
    }

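    // Reallocates the buffer to `new_capacity` slots. Note this copies
    // `min(old_capacity, new_capacity)` slots rather than only the `len`
    // live elements, mirroring the raw byte-wise behavior of the engine
    // container.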
    fn change_capacity(&mut self, new_capacity: usize) {
        let new_data = if new_capacity > 0 {
            let layout = A::ptr_layout(new_capacity);
            unsafe { self.__base.allocate(layout).cast::<T>() }
        } else {
            ptr::null_mut()
        };

        let old_data = self.__base.as_mut_ptr().cast::<T>();
        if !old_data.is_null() {
            let old_capacity = self.capacity();
            if !new_data.is_null() {
                let copy_count = core::cmp::min(old_capacity, new_capacity);
                unsafe { ptr::copy_nonoverlapping(old_data, new_data, copy_count) };
            }

            unsafe { self.__base.deallocate(old_data.cast()) };
        }

        Self::set_allocator_traits(&mut self.__base, new_data.cast(), new_capacity as u32);
    }

    fn set_allocator_traits(allocator: &mut A, data: *mut c_void, capacity: u32) {
        allocator.set_allocator_traits(data, capacity, core::mem::size_of::<T>());
    }
}

impl<T, A> Index<usize> for BSScrapArray<T, A>
where
    A: Allocator,
{
    type Output = T;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { &*self.as_ptr().add(index) }
    }
}

impl<T, A> IndexMut<usize> for BSScrapArray<T, A>
where
    A: Allocator,
{
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { &mut *self.as_mut_ptr().add(index) }
    }
}

pub struct BSTDrain<'a, T, A>
where
    A: Allocator,
{
    tail_start: usize,
    tail_len: usize,
    iter: core::slice::Iter<'a, T>,
    array: NonNull<BSScrapArray<T, A>>,
}

impl<T, A> Iterator for BSTDrain<'_, T, A>
where
    A: Allocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|item| unsafe { ptr::read(item) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<T, A> DoubleEndedIterator for BSTDrain<'_, T, A>
where
    A: Allocator,
{
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back().map(|item| unsafe { ptr::read(item) })
    }
}

impl<T, A> ExactSizeIterator for BSTDrain<'_, T, A>
where
    A: Allocator,
{
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}

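// Dropping the drain finishes consuming any elements the caller did not
// take, then shifts the untouched tail back down so the array is
// contiguous again, and restores `size`.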
impl<T, A: Allocator> Drop for BSTDrain<'_, T, A> {
    fn drop(&mut self) {
        if core::mem::needs_drop::<T>() {
            self.for_each(drop);
        }

        if self.tail_len > 0 {
            unsafe {
                let array = self.array.as_mut();

                let start = array.len();
                let tail = self.tail_start;
                if tail != start {
                    let ptr = array.as_mut_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                array.__base1.size = (start + self.tail_len) as u32;
            }
        }
    }
}

pub struct BSTArrayIterator<'a, T, A>
where
    A: Allocator,
{
    array: &'a BSScrapArray<T, A>,
    index: usize,
}

impl<'a, T, A> Iterator for BSTArrayIterator<'a, T, A>
where
    A: Allocator,
{
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { &*self.array.as_ptr().add(self.index) };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }
}

pub struct BSTArrayIntoIterator<T, A>
where
    A: Allocator,
{
    array: BSScrapArray<T, A>,
    index: usize,
}

impl<T, A> Iterator for BSTArrayIntoIterator<T, A>
where
    A: Allocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { ptr::read(self.array.as_ptr().add(self.index)) };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }
}

impl<T, A> IntoIterator for BSScrapArray<T, A>
where
    A: Allocator,
{
    type Item = T;
    type IntoIter = BSTArrayIntoIterator<T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTArrayIntoIterator { array: self, index: 0 }
    }
}

impl<'a, T, A> IntoIterator for &'a BSScrapArray<T, A>
where
    A: Allocator,
{
    type Item = &'a T;
    type IntoIter = BSTArrayIterator<'a, T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTArrayIterator { array: self, index: 0 }
    }
}

pub struct BSTArrayIterMut<'a, T, A>
where
    A: Allocator,
{
    array: &'a mut BSScrapArray<T, A>,
    index: usize,
}

impl<'a, T, A> Iterator for BSTArrayIterMut<'a, T, A>
where
    A: Allocator,
{
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            unsafe {
                let ptr = self.array.as_mut_ptr().add(self.index);
                self.index += 1;
                Some(&mut *ptr)
            }
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Report the number of elements remaining, not the total length.
        let remaining = self.array.len() - self.index;
        (remaining, Some(remaining))
    }
}

impl<'a, T, A> IntoIterator for &'a mut BSScrapArray<T, A>
where
    A: Allocator,
{
    type Item = &'a mut T;
    type IntoIter = BSTArrayIterMut<'a, T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTArrayIterMut { array: self, index: 0 }
    }
}

impl<T, A> Extend<T> for BSScrapArray<T, A>
where
    A: Allocator,
{
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            self.push(elem);
        }
    }
}

impl<T, A> core::fmt::Debug for BSScrapArray<T, A>
where
    T: core::fmt::Debug,
    A: Allocator,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.as_slice()).finish()
    }
}

impl<T, A> Default for BSScrapArray<T, A>
where
    A: Allocator,
{
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

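// NOTE: cloning delegates duplication of the backing buffer to `A::clone`;
// the element type `T` itself is not required to be `Clone` here.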
impl<T, A> Clone for BSScrapArray<T, A>
where
    A: Allocator + Clone,
{
    #[inline]
    fn clone(&self) -> Self {
        if self.__base.capacity() == 0 {
            return Self::new();
        }

        Self {
            __base: self.__base.clone(),
            __base1: BSTArrayBase { size: self.__base1.size },
            _marker: PhantomData,
        }
    }
}

impl<T, A> PartialEq for BSScrapArray<T, A>
where
    T: PartialEq,
    A: Allocator,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T, A> PartialEq<Vec<T>> for BSScrapArray<T, A>
where
    T: PartialEq,
    A: Allocator,
{
    #[inline]
    fn eq(&self, other: &Vec<T>) -> bool {
        self.as_slice() == *other
    }
}

impl<T, A> PartialEq<&[T]> for BSScrapArray<T, A>
where
    T: PartialEq,
    A: Allocator,
{
    #[inline]
    fn eq(&self, other: &&[T]) -> bool {
        self.as_slice() == *other
    }
}

impl<T, A> Eq for BSScrapArray<T, A>
where
    T: Eq,
    A: Allocator,
{
}

impl<T, A> PartialOrd for BSScrapArray<T, A>
where
    T: PartialOrd,
    A: Allocator,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T, A> Ord for BSScrapArray<T, A>
where
    T: Ord,
    A: Allocator,
{
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T, A> core::hash::Hash for BSScrapArray<T, A>
where
    T: core::hash::Hash,
    A: Allocator,
{
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}
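
// A minimal smoke-test sketch. These tests assume `BSScrapArrayAllocator`
// can service allocations outside the running game process; if it routes
// through the engine's scrap heap, substitute a host-backed `Allocator`
// implementation instead.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_pop_roundtrip() {
        let mut arr: BSScrapArray<u32> = BSScrapArray::new();
        arr.push(1);
        arr.push(2);
        assert_eq!(arr.len(), 2);
        assert_eq!(arr.pop(), Some(2));
        assert_eq!(arr.pop(), Some(1));
        assert_eq!(arr.pop(), None);
    }

    #[test]
    fn retain_keeps_matching_elements() {
        let mut arr: BSScrapArray<i32> = BSScrapArray::new();
        arr.extend([1, 2, 3, 4]);
        arr.retain(|x| x % 2 == 0);
        assert_eq!(arr.as_slice(), &[2, 4]);
    }
}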